Skip to contents

Example script demonstrating how to use the dismapr package for retrieving interpolated depth-weighted biomass data from NOAA’s DisMAP.

# Load required packages
library(arcgis)
library(dismapr) # source(here::here("R/dismapr.R")) # devtools::load_all() # devtools::install_local()
library(dplyr)
library(DT)
library(ggplot2)
library(glue)
library(here)
library(httr2)
library(jsonlite)
library(purrr)
library(sf)
library(terra)

1. Indicators

Example 1: Get indicators data for American Lobster in Northeast US Spring

# Pull the DisMAP indicator table (center of gravity, latitude/longitude
# range limits, etc.) for one species in one region-season combination;
# returns one row per survey year.
tbl_lobster_indicators <- get_dismap_indicators(
  common_name = "American lobster", 
  region      = "Northeast US",
  season      = "Spring")
tbl_lobster_indicators
#> # A tibble: 46 × 27
#>    objectid dataset_code region       season date_code species       common_name
#>       <int> <chr>        <chr>        <chr>  <chr>     <chr>         <chr>      
#>  1     9866 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  2     9867 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  3     9868 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  4     9869 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  5     9870 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  6     9871 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  7     9872 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  8     9873 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#>  9     9874 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#> 10     9875 NEUS_SPR     Northeast US Spring 20240701  Homarus amer… American l…
#> # ℹ 36 more rows
#> # ℹ 20 more variables: core_species <chr>, year <int>,
#> #   distribution_project_name <chr>, distribution_project_code <chr>,
#> #   summary_product <chr>, center_of_gravity_latitude <dbl>,
#> #   minimum_latitude <dbl>, maximum_latitude <dbl>, offset_latitude <dbl>,
#> #   center_of_gravity_latitude_se <dbl>, center_of_gravity_longitude <dbl>,
#> #   minimum_longitude <dbl>, maximum_longitude <dbl>, offset_longitude <dbl>, …

# Track the lobster center of gravity through time: one point per survey
# year, colored by year, plotted in geographic coordinates with a fixed
# lat/lon window over the Northeast US shelf.
ggplot(data = tbl_lobster_indicators) +
  geom_point(
    aes(
      x     = center_of_gravity_longitude,
      y     = center_of_gravity_latitude,
      color = year)) +
  coord_quickmap(xlim = c(-80, -65), ylim = c(33, 45)) +
  theme_classic() +
  theme(panel.border = element_rect(colour = "black", fill = NA, linewidth = 1)) +
  labs(
    x     = "Longitude",
    y     = "Latitude",
    title = "American Lobster Center of Gravity")

2. Biomass Rasters

Example 2: Download interpolated biomass rasters for Summer Flounder in NEUS Spring

# Set up parameters for downloading
species_scientific <- "Paralichthys dentatus"
species_common     <- "SummerFlounder"
region_season      <- "NEUS_SPR"

# TODO: setup below as function
# Each region-season has an IDW ImageServer whose multidimensional
# "slices" index the interpolated rasters by species and time.
base_url <- "https://maps.fisheries.noaa.gov/image/rest/services/DisMAP"
img_url  <- glue("{base_url}/{region_season}_IDW_CURRENT/ImageServer")

# Query the ImageServer /slices endpoint, restricted to one species via the
# multidimensionalDefinition filter; keep just the list of slice
# descriptors (sliceId + dimension name/values) from the JSON response.
lst_slices <- request(base_url) |> 
  req_url_path_append(glue("{region_season}_IDW_CURRENT/ImageServer/slices")) |> 
  req_url_query(
    multidimensionalDefinition = glue("[{{variableName: '{species_scientific}'}}]"),
    f = "json") |> 
  req_perform() |>
  resp_body_json() |> 
  pluck("slices")

# Flatten the slice descriptors into a tibble: one row per slice with its
# id, dimension name (expected "StdTime"), and first dimension value.
mdim1 <- \(s) s[["multidimensionalDefinition"]][[1]]
tbl_slices <- tibble(
  sliceId       = map_int(lst_slices, "sliceId"),
  dimensionName = map_chr(lst_slices, \(s) mdim1(s)[["dimensionName"]]),
  value         = map_dbl(lst_slices, \(s) mdim1(s)[["values"]][[1]])) |>
  arrange(dimensionName, value)

# Slices are expected to vary only along the StdTime dimension; convert
# ESRI epoch values to datetimes and derive the survey year.
if (all(tbl_slices$dimensionName == "StdTime")){
  tbl_slices <- tbl_slices |> 
    mutate(
      dtime = from_esri_date(value),
      year  = lubridate::year(dtime))
  
  # Ensure one slice per year; a duplicate would mean dtime varies by
  # something other than year. anyDuplicated() returns 0 when all unique,
  # replacing the verbose group_by/summarize/filter/nrow pipeline.
  stopifnot(!anyDuplicated(tbl_slices$year))
}

# Download just a few years for demonstration; look up the slice ids whose
# derived year falls in the requested span.
years_to_download <- 2015:2019
keep      <- tbl_slices$year %in% years_to_download
slice_ids <- tbl_slices$sliceId[keep]

# Create directory for downloads (quiet if it already exists)
output_dir <- here("inst/test")
dir.create(output_dir, showWarnings = FALSE)

# Download each raster: one GeoTIFF per selected slice (year), named
# {region_season}_IDW_{species_common}_{year}.tif in output_dir.
# NOTE: `yr` intentionally leaks out of the loop; later chunks reuse it.
raster_files <- character(length(slice_ids))
for (i in seq_along(slice_ids)) {
  slice_id <- slice_ids[i]
  yr       <- tbl_slices |> 
    filter(sliceId == slice_id) |> 
    pull(year)
  tif <- glue("{output_dir}/{region_season}_IDW_{species_common}_{yr}.tif")
  message(glue("Downloading raster {basename(tif)}"))
  
  r <- download_dismap_raster(
    slice_id  = slice_id,
    img_url   = img_url,
    out_tif   = tif,
    verbose   = FALSE,   # was F/F: spell out TRUE/FALSE (T/F are reassignable)
    overwrite = FALSE)
  
  # record the path only after the download succeeded
  raster_files[i] <- tif
}

# Load the last downloaded GeoTIFF as a terra SpatRaster.
# NOTE(review): relies on `yr` holding its value from the final iteration
# of the download loop above — confirm that chunk ran first.
r <- rast(glue("{output_dir}/{region_season}_IDW_{species_common}_{yr}.tif"))
r
#> class       : SpatRaster 
#> dimensions  : 400, 400, 1  (nrow, ncol, nlyr)
#> resolution  : 2660, 2660  (x, y)
#> extent      : 321350, 1385350, 3888846, 4952846  (xmin, xmax, ymin, ymax)
#> coord. ref. : NAD83(2011) / UTM zone 18N (EPSG:6347) 
#> source      : NEUS_SPR_IDW_SummerFlounder_2019.tif 
#> name        :   wtcpue 
#> min value   : 0.000000 
#> max value   : 4.181736

Plot interactive, quick

# Quick interactive leaflet map of the biomass raster with default styling
plet(r)

Plot interactive, with ocean tiles and legend title

# Interactive map on an Esri ocean basemap; the multi-line glue string
# becomes the legend title (region-season, species, year: layer name).
plet(
  r, 
  tiles = "Esri.OceanBasemap",
  main  = glue(
    "{region_season}
    {species_common}
    {yr}: {names(r)}"))

Plot static

# Static map: convert the raster to an x/y/wtcpue data frame and tile it
# with a perceptually-uniform (viridis) fill and a fixed aspect ratio.
ggplot() +
  geom_tile(
    data = as.data.frame(r, xy = TRUE),  # was `xy = T`: always spell out TRUE
    aes(x = x, y = y, fill = wtcpue)) +
  scale_fill_viridis_c() +
  coord_fixed() +
  labs(title = glue("{region_season} {species_common}, {yr}"))

3. Survey Locations

Now let’s use the arcgis R metapackage, which you can install with the following:

# One-time install of the arcgis metapackage from R-universe (not on CRAN)
install.packages("arcgis", repos = "https://r-arcgis.r-universe.dev")
# Open the DisMAP survey-info table (layer 2 of the FeatureServer) and
# read the whole table into a tibble for display.
url_survey <- "https://services2.arcgis.com/C8EMgrsFcRFL6LrL/ArcGIS/rest/services/DisMAP_Survey_Info_CURRENT/FeatureServer/2"
lyr_survey <- arc_open(url_survey)
lyr_survey
#> <Table>
#> Name: DisMAP_Survey_Info
#> Capabilities: Query
tbl_survey <- arc_select(lyr_survey) |> 
  as_tibble()
datatable(tbl_survey)

# Select sample locations for one region-season, filtered server-side to a
# single year (the full layer holds ~592k points; arc_read(lyr$url) would
# fetch them all).
region_season <- "NEUS_SPR"
year          <- 2015

url_surveypts <- glue("https://services2.arcgis.com/C8EMgrsFcRFL6LrL/arcgis/rest/services/{region_season}_Sample_Locations_CURRENT/FeatureServer")
fs  <- arc_open(url_surveypts)
lyr <- get_layer(fs, 1)

# (removed unused `f <- arc_open(lyr$url)` — arc_select() queries `lyr`)
pts_yr <- arc_select(
  lyr, 
  where = glue("Year = {year}"))
# nrow(pts_yr)  # 15,228
datatable(pts_yr)
#> Warning in instance$preRenderHook(instance): It seems your data is too big for
#> client-side DataTables. You may consider server-side processing:
#> https://rstudio.github.io/DT/server.html

# Plot survey locations; interpolate the year into the title so it stays
# consistent with the `year` variable set above (was hard-coded "2015").
ggplot() +
  geom_sf(data = pts_yr) +
  theme_minimal() +
  labs(title = glue("Northeast US Spring Survey Locations, {year}"))

TODO: